In [1]:
import io
import os
import zipfile

import earthpy as et
import geopandas as gpd
import holoviews as hv
# NOTE: import hvplot plainly — aliasing it to `hv` would shadow holoviews
import hvplot
import hvplot.pandas
import hvplot.xarray
import numpy as np
import rasterio
import rasterio.features
import requests
import rioxarray as rxr
import rioxarray.merge as rxrm
# Only one CRS import is kept: `from pyproj import CRS` was immediately
# shadowed by this rasterio import, so it could never be used.
from rasterio.crs import CRS
from shapely.geometry import shape

from utils.process_lidar import process_canopy_areas
from utils.process_lidar import process_lidar_to_canopy

# Prepare project directories
# This project uses the EarthPy directory as the root path.
# You can specify your own root directory and paths below.
data_dir = os.path.join(et.io.HOME, et.io.DATA_NAME, 'treebeard')
lidar_dir = os.path.join(data_dir, 'lidar_tile_scheme_2020')
lidar_las_dir = os.path.join(data_dir, 'las_files')
os.makedirs(data_dir, exist_ok=True)
os.makedirs(lidar_dir, exist_ok=True)
os.makedirs(lidar_las_dir, exist_ok=True)
In [2]:
# Path to the LIDAR tile index shapefile.
# lidar_dir is already an absolute path under data_dir; joining data_dir
# in front of it again would be silently discarded by os.path.join.
las_index_path = os.path.join(lidar_dir, 'lidar_index_cspn_q2.shp')

# Download LIDAR index tiles
# Specify the download URL for the LAS tile index if it exists
# This example is downloading LIDAR from DRCOG 2020 LIDAR located at this path:
# 'https://lidararchive.s3.amazonaws.com/2020_CSPN_Q2/'
# Modify the URL below with a LIDAR tile index for a particular LIDAR project
if not os.path.exists(las_index_path):
    las_index_url = ('https://gisdata.drcog.org:8443/geoserver/DRCOGPUB/'
             'ows?service=WFS&version=1.0.0&request=GetFeature&'
             'typeName=DRCOGPUB:lidar_index_cspn_q2&outputFormat=SHAPE-ZIP')

    # Download the ZIP file
    response = requests.get(las_index_url)
    response.raise_for_status()  # Check that the request was successful

    # Extract the ZIP file
    with zipfile.ZipFile(io.BytesIO(response.content)) as zip_ref:
        zip_ref.extractall(lidar_dir)

# Read the tile index, keyed by tile name.
# Uncomment the .loc line to subset to specific tiles while testing.
las_index_gdf = (
    gpd.read_file(las_index_path).set_index('tile')
#    .loc[['N3W345']]
)

# Project to EPSG:4326 (WGS84 lat/lon) for web-tile plotting
las_index_gdf = las_index_gdf.to_crs('EPSG:4326')
crs = las_index_gdf.crs

las_index_plot = las_index_gdf.hvplot(
    tiles='OSM',
    crs=las_index_gdf.crs,
    geo=True,
    line_color='black',
    line_width=2,
    fill_alpha=0
)
las_index_plot
Out[2]:
In [3]:
import tempfile

# Open project areas shapefile and plot
# Save your study area shapefile as a zipfile at the path in proj_zip_path
proj_zip_path = 'assets/project_areas_merged.zip'

# Extract to a cross-platform temporary directory.
# A hardcoded '/tmp' does not exist on Windows, where this notebook
# is being run (see the Windows paths in the outputs below).
temp_dir = os.path.join(tempfile.gettempdir(), 'extracted_shapefile')
with zipfile.ZipFile(proj_zip_path, 'r') as zip_ref:
    zip_ref.extractall(temp_dir)

# geopandas can read a directory containing a single shapefile
proj_area_gdf = gpd.read_file(temp_dir)

# Reproject to WGS84 lat/lon for web-tile plotting
proj_area_gdf = proj_area_gdf.to_crs("EPSG:4326")

proj_area_plot = proj_area_gdf.hvplot(
    x='x',
    y='y',
    aspect='equal',
    tiles='OSM',
    geo=True,
    line_color='red',
    line_width=2,
    fill_alpha=0
)

proj_area_plot
Out[3]:
In [4]:
# Identify the tiles that intersect each project area
select_tiles_gdf = gpd.sjoin(
    las_index_gdf, proj_area_gdf, how='inner', predicate='intersects'
)

# (Removed a stray `select_tiles_gdf.reset_index(drop=False)` whose
# result was discarded — reset_index returns a new frame, so the call
# was a no-op; the actual reset happens in the next cell.)
select_tile_plot = select_tiles_gdf.hvplot(
    x='x',
    y='y',
    aspect='equal',
    geo=True,
    line_color='blue',
    line_width=2,
    fill_alpha=0,
    xaxis=None,
    yaxis=None
)

# Overlay selected tiles on the project-area outlines
tile_proj_plot = select_tile_plot * proj_area_plot
#hv.save(tile_proj_plot, 'lidar_tile_plot.png')
tile_proj_plot
Out[4]:
In [5]:
# Move the 'tile' index back to a regular column so we can group on it
select_tiles_gdf = select_tiles_gdf.reset_index(drop=False)

# Build one row per project area listing every intersecting tile
tiles_by_area = (
    select_tiles_gdf
    .groupby('Proj_ID')['tile']
    .apply(list)
    .reset_index()
)
tiles_by_area
Out[5]:
Proj_ID tile
0 Conifer Hill [N4W399, N4W397, N4W389, N4W396, N4W388, N4W29...
1 Unnamed 1 [N4W264]
2 Unnamed 2 [N4W381, N4W391]
3 Zumwinkel [N4W351]
In [9]:
# Download and process .las tiles for the desired study area
# This code assumes you have prepared a study area shapefile
# with a column called 'Proj_ID' that specifies the name of the study area.
# This is downloading .las files from the URL below. Adjust for a different LIDAR project
las_root_url = 'https://lidararchive.s3.amazonaws.com/2020_CSPN_Q2/'

# Set the project name here
proj_area = proj_area_gdf[proj_area_gdf['Proj_ID'] == 'Zumwinkel']

# Create output folder for the study area being used
proj_area_name = proj_area['Proj_ID'].iloc[0]
lidar_download_path = os.path.join(data_dir, proj_area_name, "las_files")
os.makedirs(lidar_download_path, exist_ok=True)

# Download .las tiles for the study area, skipping any already on disk.
# (Removed an unused `tile_agg` list and an unused `sel_proj_area_gdf`
# re-selection that duplicated `proj_area` above.)
site_to_process = tiles_by_area[tiles_by_area['Proj_ID'] == proj_area_name].copy()
for _, row in site_to_process.iterrows():
    print("Processing LIDAR for " + proj_area_name)
    for tile in row['tile']:
        print("Processing LIDAR tile " + tile)
        file_name = tile + ".las"
        tile_path = os.path.join(lidar_download_path, file_name)
        if os.path.exists(tile_path):
            continue  # already downloaded

        # Download the LAS file
        response = requests.get(las_root_url + file_name)

        # Check if the request was successful
        if response.status_code == 200:
            with open(tile_path, 'wb') as file:
                file.write(response.content)
            print(f"File downloaded successfully and saved to {tile_path}")
        else:
            print(f"Failed to download file. Status code: {response.status_code}")

# Interpolate the point clouds and extract canopy polygons (height >= 5)
canopy_gdf = process_lidar_to_canopy(proj_area, lidar_download_path, canopy_height=5)
Processing LIDAR for Zumwinkel
Processing LIDAR tile N4W351
'output' folder already exists at: C:\Users\Pete\earth-analytics\data\treebeard\Zumwinkel\las_files\output
C:\Users\Pete\earth-analytics\data\treebeard\Zumwinkel\las_files\N4W351.las
COMPOUNDCRS["NAD83(2011) / Colorado North (ftUS) + NAVD88 height - Geoid18 (ftUS)",PROJCRS["NAD83(2011) / Colorado North (ftUS)",BASEGEOGCRS["NAD83(2011)",DATUM["NAD83 (National Spatial Reference System 2011)",ELLIPSOID["GRS 1980",6378137,298.257222101,LENGTHUNIT["metre",1]]],PRIMEM["Greenwich",0,ANGLEUNIT["Degree",0.0174532925199433]],ID["EPSG",6318]],CONVERSION["unnamed",METHOD["Lambert Conic Conformal (2SP)",ID["EPSG",9802]],PARAMETER["Latitude of 1st standard parallel",40.7833333333333,ANGLEUNIT["Degree",0.0174532925199433],ID["EPSG",8823]],PARAMETER["Latitude of 2nd standard parallel",39.7166666666667,ANGLEUNIT["Degree",0.0174532925199433],ID["EPSG",8824]],PARAMETER["Latitude of false origin",39.3333333333333,ANGLEUNIT["Degree",0.0174532925199433],ID["EPSG",8821]],PARAMETER["Longitude of false origin",-105.5,ANGLEUNIT["Degree",0.0174532925199433],ID["EPSG",8822]],PARAMETER["Easting at false origin",3000000,LENGTHUNIT["US survey foot",0.304800609601219],ID["EPSG",8826]],PARAMETER["Northing at false origin",1000000,LENGTHUNIT["US survey foot",0.304800609601219],ID["EPSG",8827]]],CS[Cartesian,2],AXIS["x",east,ORDER[1],LENGTHUNIT["US survey foot",0.304800609601219]],AXIS["y",north,ORDER[2],LENGTHUNIT["US survey foot",0.304800609601219]],ID["EPSG",6430]],VERTCRS["NAVD88 height (ftUS)",VDATUM["North American Vertical Datum 1988"],CS[vertical,1],AXIS["up",up,LENGTHUNIT["US survey foot",0.304800609601219]],GEOIDMODEL["GEOID18"],ID["EPSG",6360]]]
.\whitebox_tools.exe --run="LidarIdwInterpolation" --input='C:\Users\Pete\earth-analytics\data\treebeard\Zumwinkel\las_files\N4W351.las' --output='C:\Users\Pete\earth-analytics\data\treebeard\Zumwinkel\las_files\N4W351_fr.tif' --parameter=elevation --returns=first --resolution=1.0 --weight=1.0 --radius=3.0 -v --compress_rasters=False

************************************
* Welcome to LidarIdwInterpolation *
* Powered by WhiteboxTools         *
* www.whiteboxgeo.com              *
************************************
Performing interpolation...
reading input LiDAR file...
Binning points: 0%
Binning points: 1%
Binning points: 2%
Binning points: 3%
Binning points: 4%
Binning points: 5%
Binning points: 6%
Binning points: 7%
Binning points: 8%
Binning points: 9%
Binning points: 10%
Binning points: 11%
Binning points: 12%
Binning points: 13%
Binning points: 14%
Binning points: 15%
Binning points: 16%
Binning points: 17%
Binning points: 18%
Binning points: 19%
Binning points: 20%
Binning points: 21%
Binning points: 22%
Binning points: 23%
Binning points: 24%
Binning points: 25%
Binning points: 26%
Binning points: 27%
Binning points: 28%
Binning points: 29%
Binning points: 30%
Binning points: 31%
Binning points: 32%
Binning points: 33%
Binning points: 34%
Binning points: 35%
Binning points: 36%
Binning points: 37%
Binning points: 38%
Binning points: 39%
Binning points: 40%
Binning points: 41%
Binning points: 42%
Binning points: 43%
Binning points: 44%
Binning points: 45%
Binning points: 46%
Binning points: 47%
Binning points: 48%
Binning points: 49%
Binning points: 50%
Binning points: 51%
Binning points: 52%
Binning points: 53%
Binning points: 54%
Binning points: 55%
Binning points: 56%
Binning points: 57%
Binning points: 58%
Binning points: 59%
Binning points: 60%
Binning points: 61%
Binning points: 62%
Binning points: 63%
Binning points: 64%
Binning points: 65%
Binning points: 66%
Binning points: 67%
Binning points: 68%
Binning points: 69%
Binning points: 70%
Binning points: 71%
Binning points: 72%
Binning points: 73%
Binning points: 74%
Binning points: 75%
Binning points: 76%
Binning points: 77%
Binning points: 78%
Binning points: 79%
Binning points: 80%
Binning points: 81%
Binning points: 82%
Binning points: 83%
Binning points: 84%
Binning points: 85%
Binning points: 86%
Binning points: 87%
Binning points: 88%
Binning points: 89%
Binning points: 90%
Binning points: 91%
Binning points: 92%
Binning points: 93%
Binning points: 94%
Binning points: 95%
Binning points: 96%
Binning points: 97%
Binning points: 98%
Binning points: 99%
Binning points: 100%
Progress: 0%
Progress: 1%
Progress: 2%
Progress: 3%
Progress: 4%
Progress: 5%
Progress: 6%
Progress: 7%
Progress: 8%
Progress: 9%
Progress: 10%
Progress: 11%
Progress: 12%
Progress: 13%
Progress: 14%
Progress: 15%
Progress: 16%
Progress: 17%
Progress: 18%
Progress: 19%
Progress: 20%
Progress: 21%
Progress: 22%
Progress: 23%
Progress: 24%
Progress: 25%
Progress: 26%
Progress: 27%
Progress: 28%
Progress: 29%
Progress: 30%
Progress: 31%
Progress: 32%
Progress: 33%
Progress: 34%
Progress: 35%
Progress: 36%
Progress: 37%
Progress: 38%
Progress: 39%
Progress: 40%
Progress: 41%
Progress: 42%
Progress: 43%
Progress: 44%
Progress: 45%
Progress: 46%
Progress: 47%
Progress: 48%
Progress: 49%
Progress: 50%
Progress: 51%
Progress: 52%
Progress: 53%
Progress: 54%
Progress: 55%
Progress: 56%
Progress: 57%
Progress: 58%
Progress: 59%
Progress: 60%
Progress: 61%
Progress: 62%
Progress: 63%
Progress: 64%
Progress: 65%
Progress: 66%
Progress: 67%
Progress: 68%
Progress: 69%
Progress: 70%
Progress: 71%
Progress: 72%
Progress: 73%
Progress: 74%
Progress: 75%
Progress: 76%
Progress: 77%
Progress: 78%
Progress: 79%
Progress: 80%
Progress: 81%
Progress: 82%
Progress: 83%
Progress: 84%
Progress: 85%
Progress: 86%
Progress: 87%
Progress: 88%
Progress: 89%
Progress: 90%
Progress: 91%
Progress: 92%
Progress: 93%
Progress: 94%
Progress: 95%
Progress: 96%
Progress: 97%
Progress: 98%
Progress: 99%
Progress: 100%
Saving data...
Finished C:\Users\Pete\earth-analytics\data\treebeard\Zumwinkel\las_files\N4W351 (1 of 1)
Progress: 0%
Elapsed Time (including I/O): 12.781s
.\whitebox_tools.exe --run="LidarIdwInterpolation" --input='C:\Users\Pete\earth-analytics\data\treebeard\Zumwinkel\las_files\N4W351.las' --output='C:\Users\Pete\earth-analytics\data\treebeard\Zumwinkel\las_files\N4W351_gr.tif' --parameter=elevation --returns=ground --resolution=1.0 --weight=1.0 --radius=3.0 -v --compress_rasters=False

************************************
* Welcome to LidarIdwInterpolation *
* Powered by WhiteboxTools         *
* www.whiteboxgeo.com              *
************************************
Performing interpolation...
reading input LiDAR file...
Binning points: 0%
Binning points: 1%
Binning points: 2%
Binning points: 3%
Binning points: 4%
Binning points: 5%
Binning points: 6%
Binning points: 7%
Binning points: 8%
Binning points: 9%
Binning points: 10%
Binning points: 11%
Binning points: 12%
Binning points: 13%
Binning points: 14%
Binning points: 15%
Binning points: 16%
Binning points: 17%
Binning points: 18%
Binning points: 19%
Binning points: 20%
Binning points: 21%
Binning points: 22%
Binning points: 23%
Binning points: 24%
Binning points: 25%
Binning points: 26%
Binning points: 27%
Binning points: 28%
Binning points: 29%
Binning points: 30%
Binning points: 31%
Binning points: 32%
Binning points: 33%
Binning points: 34%
Binning points: 35%
Binning points: 36%
Binning points: 37%
Binning points: 38%
Binning points: 39%
Binning points: 40%
Binning points: 41%
Binning points: 42%
Binning points: 43%
Binning points: 44%
Binning points: 45%
Binning points: 46%
Binning points: 47%
Binning points: 48%
Binning points: 49%
Binning points: 50%
Binning points: 51%
Binning points: 52%
Binning points: 53%
Binning points: 54%
Binning points: 55%
Binning points: 56%
Binning points: 57%
Binning points: 58%
Binning points: 59%
Binning points: 60%
Binning points: 61%
Binning points: 62%
Binning points: 63%
Binning points: 64%
Binning points: 65%
Binning points: 66%
Binning points: 67%
Binning points: 68%
Binning points: 69%
Binning points: 70%
Binning points: 71%
Binning points: 72%
Binning points: 73%
Binning points: 74%
Binning points: 75%
Binning points: 76%
Binning points: 77%
Binning points: 78%
Binning points: 79%
Binning points: 80%
Binning points: 81%
Binning points: 82%
Binning points: 83%
Binning points: 84%
Binning points: 85%
Binning points: 86%
Binning points: 87%
Binning points: 88%
Binning points: 89%
Binning points: 90%
Binning points: 91%
Binning points: 92%
Binning points: 93%
Binning points: 94%
Binning points: 95%
Binning points: 96%
Binning points: 97%
Binning points: 98%
Binning points: 99%
Binning points: 100%
Progress: 0%
Progress: 1%
Progress: 2%
Progress: 3%
Progress: 4%
Progress: 5%
Progress: 6%
Progress: 7%
Progress: 8%
Progress: 9%
Progress: 10%
Progress: 11%
Progress: 12%
Progress: 13%
Progress: 14%
Progress: 15%
Progress: 16%
Progress: 17%
Progress: 18%
Progress: 19%
Progress: 20%
Progress: 21%
Progress: 22%
Progress: 23%
Progress: 24%
Progress: 25%
Progress: 26%
Progress: 27%
Progress: 28%
Progress: 29%
Progress: 30%
Progress: 31%
Progress: 32%
Progress: 33%
Progress: 34%
Progress: 35%
Progress: 36%
Progress: 37%
Progress: 38%
Progress: 39%
Progress: 40%
Progress: 41%
Progress: 42%
Progress: 43%
Progress: 44%
Progress: 45%
Progress: 46%
Progress: 47%
Progress: 48%
Progress: 49%
Progress: 50%
Progress: 51%
Progress: 52%
Progress: 53%
Progress: 54%
Progress: 55%
Progress: 56%
Progress: 57%
Progress: 58%
Progress: 59%
Progress: 60%
Progress: 61%
Progress: 62%
Progress: 63%
Progress: 64%
Progress: 65%
Progress: 66%
Progress: 67%
Progress: 68%
Progress: 69%
Progress: 70%
Progress: 71%
Progress: 72%
Progress: 73%
Progress: 74%
Progress: 75%
Progress: 76%
Progress: 77%
Progress: 78%
Progress: 79%
Progress: 80%
Progress: 81%
Progress: 82%
Progress: 83%
Progress: 84%
Progress: 85%
Progress: 86%
Progress: 87%
Progress: 88%
Progress: 89%
Progress: 90%
Progress: 91%
Progress: 92%
Progress: 93%
Progress: 94%
Progress: 95%
Progress: 96%
Progress: 97%
Progress: 98%
Progress: 99%
Progress: 100%
Saving data...
Finished C:\Users\Pete\earth-analytics\data\treebeard\Zumwinkel\las_files\N4W351 (1 of 1)
Progress: 0%
Elapsed Time (including I/O): 15.495s
In [10]:
# Process output shapefiles

output_path = os.path.join(data_dir, proj_area_name, "processed_output_files")

# exist_ok avoids the non-atomic check-then-create race
os.makedirs(output_path, exist_ok=True)

# Derive canopy-gap polygons with a 5 (ft) buffer and write them out
process_canopy_areas(canopy_gdf, proj_area, output_path, buffer_distance=5)
c:\Users\Pete\Documents\GitHub\treebeard\utils\process_lidar.py:94: UserWarning: Column names longer than 10 characters will be truncated when saved to ESRI Shapefile.
  exploded_gap_gdf.to_file(canopy_gaps_calced_path)
c:\Users\Pete\miniconda3\envs\treebeard\lib\site-packages\pyogrio\raw.py:709: RuntimeWarning: Normalized/laundered field name: 'Gap_Size_Category' to 'Gap_Size_C'
  ogr_write(
In [12]:
# Plot outputs of canopy and buffered gaps


def plot_polygons_on_imagery(gdf, title):
    """Plot a polygon GeoDataFrame over Esri imagery in EPSG:4326.

    Shared by the gaps and canopy plots below, which previously
    duplicated the same 14-line hvplot call.
    """
    return gdf.to_crs("EPSG:4326").hvplot(
        x='x',
        y='y',
        aspect='equal',
        geo=True,
        line_color='blue',
        line_width=2,
        fill_alpha=.5,
        width=600,
        height=600,
        tiles='EsriImagery',
        title=title
    )


gaps_filename = "lidar_" + proj_area_name + "_canopy_gaps_calced.shp"
gaps_shp_path = os.path.join(output_path, gaps_filename)
canopy_gaps_calced = gpd.read_file(gaps_shp_path)

gaps_plot = plot_polygons_on_imagery(
    canopy_gaps_calced, "Processed Canopy Gaps from LIDAR with 5' Buffer")
canopy_plot = plot_polygons_on_imagery(
    canopy_gdf, "Processed Canopy from LIDAR")

gaps_plot + canopy_plot
Out[12]:
In [5]:
%%bash
jupyter nbconvert --to html lidar.ipynb
Windows Subsystem for Linux has no installed distributions.

Use 'wsl.exe --list --online' to list available distributions
and 'wsl.exe --install <Distro>' to install.

Distributions can also be installed by visiting the Microsoft Store:
https://aka.ms/wslstore
Error code: Bash/Service/CreateInstance/GetDefaultDistro/WSL_E_DEFAULT_DISTRO_NOT_FOUND
---------------------------------------------------------------------------
CalledProcessError                        Traceback (most recent call last)
Cell In[5], line 1
----> 1 get_ipython().run_cell_magic('bash', '', 'jupyter nbconvert --to html lidar.ipynb\n')

File c:\Users\Pete\miniconda3\envs\treebeard\lib\site-packages\IPython\core\interactiveshell.py:2541, in InteractiveShell.run_cell_magic(self, magic_name, line, cell)
   2539 with self.builtin_trap:
   2540     args = (magic_arg_s, cell)
-> 2541     result = fn(*args, **kwargs)
   2543 # The code below prevents the output from being displayed
   2544 # when using magics with decorator @output_can_be_silenced
   2545 # when the last Python token in the expression is a ';'.
   2546 if getattr(fn, magic.MAGIC_OUTPUT_CAN_BE_SILENCED, False):

File c:\Users\Pete\miniconda3\envs\treebeard\lib\site-packages\IPython\core\magics\script.py:155, in ScriptMagics._make_script_magic.<locals>.named_script_magic(line, cell)
    153 else:
    154     line = script
--> 155 return self.shebang(line, cell)

File c:\Users\Pete\miniconda3\envs\treebeard\lib\site-packages\IPython\core\magics\script.py:315, in ScriptMagics.shebang(self, line, cell)
    310 if args.raise_error and p.returncode != 0:
    311     # If we get here and p.returncode is still None, we must have
    312     # killed it but not yet seen its return code. We don't wait for it,
    313     # in case it's stuck in uninterruptible sleep. -9 = SIGKILL
    314     rc = p.returncode or -9
--> 315     raise CalledProcessError(rc, cell)

CalledProcessError: Command 'b'jupyter nbconvert --to html lidar.ipynb\n'' returned non-zero exit status 1.